In [1]:
import json
import copy
import time
import random
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
import matplotlib.pyplot as plt
from matplotlib import pyplot as plt
from torchsummary import summary
In [2]:
from nmfd_gnn import NMFD_GNN

1: set parameters

In [3]:
#check GPU availability and fix all RNG seeds for reproducibility
print (torch.cuda.is_available())
device = torch.device("cuda:0")   #all tensors and the model are placed on the first GPU
random_seed = 42
random.seed(random_seed)
torch.manual_seed(random_seed)
torch.cuda.manual_seed(random_seed)
r = random.random   #RNG handle passed later to random.shuffle in train_process
True
In [4]:
#1.1: settings
M = 20                       #number of time interval in a window
missing_ratio = 0.50         #fraction of masked (missing) readings
file_name = "m_" + str(M) + "_missing_" + str(int(missing_ratio*100))   #e.g. "m_20_missing_50"; also used as the output folder name
print (file_name)

#1.2: hyperparameters
num_epochs, batch_size, learning_rate = 200, 16, 0.001
beta_flow, beta_occ, beta_phy = 1.0, 1.0, 0.1   #loss weights: flow data, occupancy data, physics term
batch_size_vt = 16  #batch size for evaluation and test

hyper = {"n_e": num_epochs, "b_s": batch_size, "b_s_vt": batch_size_vt, "l_r": learning_rate,\
         "beta_f": beta_flow, "beta_o": beta_occ, "beta_p": beta_phy}

gnn_dim_1, gnn_dim_2, gnn_dim_3, lstm_dim = 2, 128, 128, 128   #layer widths passed to NMFD_GNN
p_dim = 10    #column dimension of L1, L2
c_k = 5.5     #meter, the sum of loop width and uniform vehicle length. based on Gero and Daganzo 2008.
theta_ini = [-2.879, 5.207, -2.473, 1.722, 3.619]   #initial theta values handed to NMFD_GNN via hyper_model
 

hyper_model = {"g_dim_1": gnn_dim_1, "g_dim_2": gnn_dim_2, "g_dim_3": gnn_dim_3, "l_dim": lstm_dim,\
               "p_dim": p_dim, "c_k": c_k, "theta_ini": theta_ini}
max_no_decrease = 30   #early-stop patience (epochs without validation improvement)

#1.3: set paths
root_path = "/home/umni2/a/umnilab/users/xue120/umni4/2023_mfd_traffic_london/"
file_path = root_path + "2_prepare_data/" + file_name + "/"
train_path, vali_path, test_path =\
    file_path + "train.json", file_path + "vali.json", file_path + "test.json"
sensor_id_path = file_path + "sensor_id_order.json"
sensor_adj_path = file_path + "sensor_adj.json"
mean_std_path = file_path + "mean_std.json"
m_20_missing_50

2: visualization

In [5]:
def visualize_train_loss(total_phy_flow_occ_loss):
    """Plot the per-epoch physics/flow/occupancy training losses and save to
    <file_name>/train_loss.png (column 0 of each row is the combined loss)."""
    plt.figure(figsize=(4,3), dpi=75)
    loss_arr = np.array(total_phy_flow_occ_loss)
    epochs = range(loss_arr.shape[0])
    # columns 1-3 hold the physics / flow / occupancy components
    for col, label in ((1, "phy loss"), (2, "flow loss"), (3, "occ loss")):
        plt.plot(epochs, loss_arr[:, col], linewidth=1, label = label)
    plt.legend()
    plt.title('Loss decline on train')
    plt.xlabel('Epoch')
    plt.ylabel('Loss')
    plt.savefig(file_name + '/' + 'train_loss.png', bbox_inches = 'tight')
    plt.show()
    
def visualize_flow_loss(vali_f_mae, test_f_mae):
    """Plot validation/test flow MAE per epoch and save to <file_name>/flow_mae.png."""
    plt.figure(figsize=(4,3), dpi=75)
    epochs = range(len(vali_f_mae))
    for series, label in ((vali_f_mae, "Validate"), (test_f_mae, "Test")):
        plt.plot(epochs, series, linewidth=1, label=label)
    plt.legend()
    plt.title('MAE of flow on validate/test')
    plt.xlabel('Epoch')
    plt.ylabel('MAE (veh/h)')
    plt.savefig(file_name + '/' + 'flow_mae.png', bbox_inches = 'tight')
    plt.show()
    
def visualize_occ_loss(vali_o_mae, test_o_mae):
    """Plot validation/test occupancy MAE per epoch and save to <file_name>/occ_mae.png."""
    plt.figure(figsize=(4,3), dpi=75)
    epochs = range(len(vali_o_mae))
    for series, label in ((vali_o_mae, "Validate"), (test_o_mae, "Test")):
        plt.plot(epochs, series, linewidth=1, label=label)
    plt.legend()
    plt.title('MAE of occupancy on validate/test')
    plt.xlabel('Epoch')
    plt.ylabel('MAE')
    plt.savefig(file_name + '/' + 'occ_mae.png',bbox_inches = 'tight')
    plt.show()

3: compute the error

In [6]:
def MAELoss(yhat, y):
    """Mean absolute error between two tensors, returned as a Python float.

    The original wrapped the absolute error in torch.div(..., 1), a no-op
    division by 1; it is removed here without changing the result.
    """
    return float(torch.mean(torch.abs(yhat - y)))

def RMSELoss(yhat, y):
    """Root-mean-square error between two tensors, returned as a Python float."""
    squared_err = (yhat - y) ** 2
    return float(torch.sqrt(squared_err.mean()))

def vali_test(model, f, f_mask, o, o_mask, f_o_mean_std, b_s_vt):
    """Batched evaluation: de-normalized MAE and RMSE of flow and occupancy.

    Runs model.run on the masked inputs batch by batch, scales the
    per-batch errors back by the stored stds, then combines them into
    dataset-level metrics weighted by batch size (RMSEs are combined in
    the squared domain before the final square root).
    """
    flow_std, occ_std = f_o_mean_std[1], f_o_mean_std[3]
    n = len(f)
    f_mae_list, f_rmse_list = list(), list()
    o_mae_list, o_rmse_list = list(), list()
    num_list = list()
    for start in range(0, n, b_s_vt):
        end = min(start + b_s_vt, n)
        num_list.append(end - start)
        bf, bo = f[start: end], o[start: end]
        bf_hat, bo_hat, bq_hat, bq_theta = model.run(f_mask[start: end], o_mask[start: end])
        bf_hat, bo_hat = bf_hat.cpu(), bo_hat.cpu()
        f_mae_list.append(MAELoss(bf_hat, bf)*flow_std)
        f_rmse_list.append(RMSELoss(bf_hat, bf)*flow_std)
        o_mae_list.append(MAELoss(bo_hat, bo)*occ_std)
        o_rmse_list.append(RMSELoss(bo_hat, bo)*occ_std)
    f_mae = np.dot(f_mae_list, num_list)/n
    o_mae = np.dot(o_mae_list, num_list)/n
    f_rmse = np.sqrt(np.dot(np.multiply(f_rmse_list, f_rmse_list), num_list)/n)
    o_rmse = np.sqrt(np.dot(np.multiply(o_rmse_list, o_rmse_list), num_list)/n)
    return f_mae, f_rmse, o_mae, o_rmse

def evaluate(model, vt_f, vt_o, vt_f_m, vt_o_m, f_o_mean_std, b_s_vt): #vt: vali_test
    """Thin wrapper around vali_test that reorders the mask arguments.

    Returns (f_mae, f_rmse, o_mae, o_rmse) for the given vali/test split.
    """
    return vali_test(model, vt_f, vt_f_m, vt_o, vt_o_m, f_o_mean_std, b_s_vt)

4: train

In [7]:
#4.1: one training epoch
def train_epoch(model, opt, criterion, train_f_x, train_f_y, train_o_x, train_o_y, hyper, flow_std_squ): 
    """Run one training epoch over mini-batches and return averaged losses.

    f: flow; o: occupancy. train_*_x are the masked (input) tensors, train_*_y
    the targets. Returns (aver_loss, model, aver_p_loss, aver_f_loss,
    aver_o_loss).

    Fix vs. the original: the running averages were recomputed inside the
    batch loop on every iteration (wasted work) and were left undefined
    (NameError) when the loop body never executed; they are now computed once
    after the loop, with a guard for the empty case.
    """
    model.train()
    losses, p_losses, f_losses, o_losses = list(), list(), list(), list()
    
    beta_f, beta_o, beta_p, b_s = hyper["beta_f"], hyper["beta_o"], hyper["beta_p"], hyper["b_s"]
    n = len(train_f_x)
    print ("# batch: ", int(n/b_s))   
    
    # NOTE(review): range(0, n-b_s, b_s) drops the final (possibly partial)
    # batch; kept as-is to match the original behavior.
    for i in range(0, n-b_s, b_s):
        time1 = time.time()
        x_f_batch, y_f_batch = train_f_x[i: i+b_s], train_f_y[i: i+b_s]   
        x_o_batch, y_o_batch = train_o_x[i: i+b_s], train_o_y[i: i+b_s]

        opt.zero_grad() 
        y_f_hat, y_o_hat, q_hat, q_theta = model.run(x_f_batch, x_o_batch)
        
        p_loss = criterion(q_hat, q_theta).cpu()                #physical loss 
        p_loss = p_loss/flow_std_squ                            #normalize by flow variance
        
        f_loss = criterion(y_f_hat.cpu(), y_f_batch)              #data loss of flow
        o_loss = criterion(y_o_hat.cpu(), y_o_batch)              #data loss of occupancy
        
        loss = beta_f*f_loss + beta_o*o_loss + beta_p*p_loss
        
        loss.backward()
        opt.step()
        losses.append(loss.data.numpy())
        p_losses.append(p_loss.data.numpy())
        f_losses.append(f_loss.data.numpy())
        o_losses.append(o_loss.data.numpy())
        
        if i % (64*b_s) == 0:   #periodic progress report every 64 batches
            print ("i_batch: ", i/b_s)
            print ("the loss for this batch: ", loss.data.numpy())
            print ("flow loss", f_loss.data.numpy())
            print ("occ loss", o_loss.data.numpy())
            time2 = time.time()
            print ("time for this batch", time2-time1)
            print ("----------------------------------")
    #compute the epoch averages once, after all batches
    n_loss = max(len(losses), 1)   #guard: avoid ZeroDivisionError on an empty epoch
    aver_loss = sum(losses)/n_loss
    aver_p_loss = sum(p_losses)/n_loss
    aver_f_loss = sum(f_losses)/n_loss
    aver_o_loss = sum(o_losses)/n_loss
    return aver_loss, model, aver_p_loss, aver_f_loss, aver_o_loss

#4.2: all train epochs
def train_process(model, criterion, train, vali, test, hyper, f_o_mean_std):
    """Full training loop: shuffle, run train_epoch, evaluate on the validation
    and test splits, refresh the diagnostic plots, checkpoint all metric
    histories to JSON, and early-stop after `max_no_decrease` epochs without
    improvement of the normalized validation error.

    Returns (total_phy_flow_occ_loss, model)."""
    total_phy_flow_occ_loss = list()   #per-epoch [total, phy, flow, occ] train losses
    
    n_mse_flow_occ = 0 #mse(flow) + mse(occ) for validation sets.
    
    #masked inputs go to the GPU once; targets stay on CPU for the losses
    vali_f, vali_o = vali["flow"], vali["occupancy"] 
    vali_f_m, vali_o_m = vali["flow_mask"].to(device), vali["occupancy_mask"].to(device) 
    test_f, test_o = test["flow"], test["occupancy"] 
    test_f_m, test_o_m = test["flow_mask"].to(device), test["occupancy_mask"].to(device) 
    
    l_r, n_e = hyper["l_r"], hyper["n_e"]
    opt = optim.Adam(model.parameters(), l_r, betas = (0.9,0.999), weight_decay=0.0001)
    #single LR drop at epoch 150 (MultiStepLR's default gamma is 0.1)
    opt_scheduler = torch.optim.lr_scheduler.MultiStepLR(opt, milestones=[150])
    
    print ("# epochs ", n_e)
    #r_*: per-epoch metric histories (MAE/RMSE on validation and test)
    r_vali_f_mae, r_vali_o_mae, r_test_f_mae, r_test_o_mae = list(), list(), list(), list()
    r_vali_f_rmse, r_vali_o_rmse, r_test_f_rmse, r_test_o_rmse = list(), list(), list(), list()
    
    flow_std_squ = np.power(f_o_mean_std[1], 2)   #normalizer for the physics loss
    
    no_decrease = 0   #consecutive epochs without validation improvement
    for i in range(n_e):
        print ("----------------an epoch starts-------------------")
        #time1_s = time.time()
        
        time_s = time.time()
        print ("i_epoch: ", i)
        #reshuffle the training windows every epoch
        n_train = len(train["flow"])
        number_list = copy.copy(list(range(n_train)))
        # NOTE(review): shuffle's 'random' parameter was removed in Python 3.11;
        # r is random.random (seeded in the setup cell), so this call must
        # become random.shuffle(number_list) on newer interpreters.
        random.shuffle(number_list, random = r)
        shuffle_idx = torch.tensor(number_list)
        train_x_f, train_y_f = train["flow_mask"][shuffle_idx], train["flow"][shuffle_idx]
        train_x_o, train_y_o = train["occupancy_mask"][shuffle_idx], train["occupancy"][shuffle_idx] 
        
        
        aver_loss, model, aver_p_loss, aver_f_loss, aver_o_loss =\
            train_epoch(model, opt, criterion, train_x_f.to(device), train_y_f,\
                        train_x_o.to(device), train_y_o, hyper, flow_std_squ)
        opt_scheduler.step()
        
        total_phy_flow_occ_loss.append([aver_loss, aver_p_loss, aver_f_loss, aver_o_loss])
        print ("train loss for this epoch: ", round(aver_loss, 6))
        
        #evaluate
        b_s_vt = hyper["b_s_vt"]
        vali_f_mae, vali_f_rmse, vali_o_mae, vali_o_rmse =\
            evaluate(model, vali_f, vali_o, vali_f_m, vali_o_m, f_o_mean_std, b_s_vt)
        test_f_mae, test_f_rmse, test_o_mae, test_o_rmse =\
            evaluate(model, test_f, test_o, test_f_m, test_o_m, f_o_mean_std, b_s_vt)  
        
        r_vali_f_mae.append(vali_f_mae)
        r_test_f_mae.append(test_f_mae)
        r_vali_o_mae.append(vali_o_mae)
        r_test_o_mae.append(test_o_mae)
        r_vali_f_rmse.append(vali_f_rmse)
        r_test_f_rmse.append(test_f_rmse)
        r_vali_o_rmse.append(vali_o_rmse)
        r_test_o_rmse.append(test_o_rmse)
        
        #refresh the three diagnostic figures every epoch
        visualize_train_loss(total_phy_flow_occ_loss)
        visualize_flow_loss(r_vali_f_mae, r_test_f_mae)
        visualize_occ_loss(r_vali_o_mae, r_test_o_mae)
        time_e = time.time()
        print ("time for this epoch", time_e - time_s)
        
        #checkpoint all metric histories to JSON after every epoch
        performance = {"train": total_phy_flow_occ_loss,\
                  "vali": [r_vali_f_mae, r_vali_f_rmse, r_vali_o_mae, r_vali_o_rmse],\
                  "test": [r_test_f_mae, r_test_f_rmse, r_test_o_mae, r_test_o_rmse]}
        subfile =  open(file_name + '/' + 'performance'+'.json','w')
        json.dump(performance, subfile)
        subfile.close()
        
        #early stop
        #normalize the validation RMSEs so flow and occupancy are comparable
        flow_std, occ_std = f_o_mean_std[1], f_o_mean_std[3]
        norm_f_rmse, norm_o_rmse = vali_f_rmse/flow_std, vali_o_rmse/occ_std
        norm_sum_mse = norm_f_rmse*norm_f_rmse + norm_o_rmse*norm_o_rmse
        
        if n_mse_flow_occ > 0:   #after the first epoch: track the best score so far
            min_until_now = min([min_until_now, norm_sum_mse])
        else:                    #first epoch: initialize with a large sentinel
            min_until_now = 1000000.0  
        if norm_sum_mse > min_until_now:
            no_decrease = no_decrease+1
        else:
            no_decrease = 0
        if no_decrease == max_no_decrease:
            print ("Early stop at the " + str(i+1) + "-th epoch")
            return total_phy_flow_occ_loss, model 
        n_mse_flow_occ = n_mse_flow_occ + 1
        
        print ("No_decrease: ", no_decrease)
    return total_phy_flow_occ_loss, model    

5: prepare tensors

In [8]:
def tensorize(train_vali_test):
    """Convert the four flow/occupancy arrays of a data split into torch tensors.

    Returns a new dict with the same four keys, each value a torch.Tensor.
    """
    keys = ("flow", "flow_mask", "occupancy", "occupancy_mask")
    return {key: torch.tensor(train_vali_test[key]) for key in keys}

def normalize_flow_occ(tvt, f_o_mean_std):  #tvt: train, vali, test
    """Z-normalize flow and occupancy (masked and unmasked) in place.

    f_o_mean_std is [flow_mean, flow_std, occ_mean, occ_std]. Each of the
    four entries of tvt is rewritten as a normalized nested list, and the
    same (mutated) dict is returned.
    """
    f_mean, f_std, o_mean, o_std = f_o_mean_std
    plan = (("flow_mask", f_mean, f_std),
            ("flow", f_mean, f_std),
            ("occupancy_mask", o_mean, o_std),
            ("occupancy", o_mean, o_std))
    for key, mean, std in plan:
        tvt[key] = ((np.array(tvt[key]) - mean)/std).tolist()
    return tvt

def transform_distance(d_matrix):
    """Convert a distance matrix into Gaussian-kernel weights, in place.

    Each entry d becomes exp(-10000 * d^2 / sigma^2), where sigma is the
    standard deviation of the whole matrix; the mutated matrix is returned.
    """
    sigma = np.std(d_matrix)
    sigma_square = sigma*sigma
    for row in d_matrix:
        for j, d_i_j in enumerate(row):
            row[j] = np.exp(-10000.0*d_i_j*d_i_j/sigma_square)
    return d_matrix

def load_data(train_path, vali_path, test_path, sensor_adj_path, mean_std_path, sensor_id_path):
    """Load the three data splits, the sensor adjacency matrix, the
    normalization statistics, and per-sensor lengths.

    Returns (train, vali, test, adj, n_sensor, f_o_mean_std, sensor_length):
    the splits are dicts of normalized torch tensors, adj is a float tensor of
    Gaussian-kernel weights already on `device`, f_o_mean_std is
    [f_mean, f_std, o_mean, o_std]."""
    mean_std = json.load(open(mean_std_path))
    f_mean, f_std, o_mean, o_std =\
        mean_std["f_mean"], mean_std["f_std"], mean_std["o_mean"], mean_std["o_std"]
    f_o_mean_std = [f_mean, f_std, o_mean, o_std]
    
    train = json.load(open(train_path))
    vali = json.load(open(vali_path))
    test = json.load(open(test_path))
    adj = json.load(open(sensor_adj_path))["adj"]
    n_sensor = len(train["flow"][0])    #sensors are the second axis of "flow"
    
    #normalize each split with the shared statistics, then convert to tensors
    train = tensorize(normalize_flow_occ(train, f_o_mean_std))
    vali = tensorize(normalize_flow_occ(vali, f_o_mean_std))
    test = tensorize(normalize_flow_occ(test, f_o_mean_std))

    adj = torch.tensor(transform_distance(adj), device=device).float()   
    
    #each sensor_id entry is a list: [0] is the sensor's index and [3]
    #presumably its loop length — TODO confirm against the data-prep script
    df_sensor_id = json.load(open(sensor_id_path))
    sensor_length = [0.0 for i in range(n_sensor)]
    for sensor in df_sensor_id:
        sensor_length[df_sensor_id[sensor][0]] = df_sensor_id[sensor][3]
        
    return train, vali, test, adj, n_sensor, f_o_mean_std, sensor_length

6: main

In [9]:
#6.1 load the data (and time the load)
time1 = time.time()
train, vali, test, adj, n_sensor, f_o_mean_std, sensor_length =\
    load_data(train_path, vali_path, test_path, sensor_adj_path, mean_std_path, sensor_id_path)
time2 = time.time()
print (time2-time1)   #loading time in seconds
13.775065660476685
In [10]:
#sanity check: split sizes and the normalization statistics
print (len(train["flow"]))
print (len(vali["flow"]))
print (len(test["flow"]))
print (f_o_mean_std)   #[f_mean, f_std, o_mean, o_std]
1536
499
500
[425.68492811748513, 254.84583261239152, 0.1814023556701015, 0.18315625109655478]
In [11]:
#instantiate the physics-informed GNN on the GPU and the MSE criterion
model = NMFD_GNN(n_sensor, M, hyper_model, f_o_mean_std, sensor_length, adj).to(device)   
cri = nn.MSELoss() 
In [12]:
#6.2: train the model (long-running: prints progress and refreshes plots each epoch)
total_phy_flow_occ_loss, trained_model = train_process(model, cri, train, vali, test, hyper, f_o_mean_std)
# epochs  200
----------------an epoch starts-------------------
i_epoch:  0
# batch:  96
i_batch:  0.0
the loss for this batch:  1.9815644
flow loss 1.0990472
occ loss 0.6143677
time for this batch 0.8666303157806396
----------------------------------
i_batch:  64.0
the loss for this batch:  0.7851688
flow loss 0.28925544
occ loss 0.26341987
time for this batch 0.46655821800231934
----------------------------------
train loss for this epoch:  0.90803
time for this epoch 56.600130796432495
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  1
# batch:  96
i_batch:  0.0
the loss for this batch:  0.62810546
flow loss 0.22280373
occ loss 0.19019218
time for this batch 0.5049211978912354
----------------------------------
i_batch:  64.0
the loss for this batch:  0.5521626
flow loss 0.17682189
occ loss 0.16271259
time for this batch 0.482083797454834
----------------------------------
train loss for this epoch:  0.576578
time for this epoch 57.2423734664917
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  2
# batch:  96
i_batch:  0.0
the loss for this batch:  0.51837295
flow loss 0.17848775
occ loss 0.14021462
time for this batch 0.44369935989379883
----------------------------------
i_batch:  64.0
the loss for this batch:  0.5815691
flow loss 0.17008369
occ loss 0.20409842
time for this batch 0.4835498332977295
----------------------------------
train loss for this epoch:  0.519876
time for this epoch 56.98795461654663
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  3
# batch:  96
i_batch:  0.0
the loss for this batch:  0.46400177
flow loss 0.1502272
occ loss 0.1344724
time for this batch 0.3854334354400635
----------------------------------
i_batch:  64.0
the loss for this batch:  0.4384641
flow loss 0.14810973
occ loss 0.11199102
time for this batch 0.4764838218688965
----------------------------------
train loss for this epoch:  0.494977
time for this epoch 56.59439134597778
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  4
# batch:  96
i_batch:  0.0
the loss for this batch:  0.45155627
flow loss 0.14722879
occ loss 0.12919635
time for this batch 0.4362785816192627
----------------------------------
i_batch:  64.0
the loss for this batch:  0.45014727
flow loss 0.13063583
occ loss 0.12899676
time for this batch 0.4861299991607666
----------------------------------
train loss for this epoch:  0.476882
time for this epoch 56.48940825462341
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  5
# batch:  96
i_batch:  0.0
the loss for this batch:  0.4881399
flow loss 0.12328085
occ loss 0.13712935
time for this batch 0.4240257740020752
----------------------------------
i_batch:  64.0
the loss for this batch:  0.41624188
flow loss 0.13219951
occ loss 0.11508801
time for this batch 0.37302088737487793
----------------------------------
train loss for this epoch:  0.467109
time for this epoch 56.74505114555359
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  6
# batch:  96
i_batch:  0.0
the loss for this batch:  0.41717216
flow loss 0.12227805
occ loss 0.117699414
time for this batch 0.473649263381958
----------------------------------
i_batch:  64.0
the loss for this batch:  0.43921518
flow loss 0.10907964
occ loss 0.1155334
time for this batch 0.4767031669616699
----------------------------------
train loss for this epoch:  0.459195
time for this epoch 56.73500633239746
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  7
# batch:  96
i_batch:  0.0
the loss for this batch:  0.45677063
flow loss 0.120371975
occ loss 0.1309238
time for this batch 0.45988893508911133
----------------------------------
i_batch:  64.0
the loss for this batch:  0.4023338
flow loss 0.10447434
occ loss 0.112420246
time for this batch 0.4799065589904785
----------------------------------
train loss for this epoch:  0.451609
time for this epoch 56.56524610519409
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  8
# batch:  96
i_batch:  0.0
the loss for this batch:  0.40440255
flow loss 0.11971536
occ loss 0.10398293
time for this batch 0.47215843200683594
----------------------------------
i_batch:  64.0
the loss for this batch:  0.41753185
flow loss 0.1121397
occ loss 0.11227685
time for this batch 0.49344325065612793
----------------------------------
train loss for this epoch:  0.445805
time for this epoch 57.16133451461792
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  9
# batch:  96
i_batch:  0.0
the loss for this batch:  0.5015305
flow loss 0.1374961
occ loss 0.14968869
time for this batch 0.43357348442077637
----------------------------------
i_batch:  64.0
the loss for this batch:  0.45673352
flow loss 0.12625226
occ loss 0.12890202
time for this batch 0.5211546421051025
----------------------------------
train loss for this epoch:  0.442471
time for this epoch 56.98993992805481
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  10
# batch:  96
i_batch:  0.0
the loss for this batch:  0.42497647
flow loss 0.11038421
occ loss 0.12952758
time for this batch 0.4687023162841797
----------------------------------
i_batch:  64.0
the loss for this batch:  0.37421432
flow loss 0.100800306
occ loss 0.10845154
time for this batch 0.49625158309936523
----------------------------------
train loss for this epoch:  0.437229
time for this epoch 58.26401424407959
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  11
# batch:  96
i_batch:  0.0
the loss for this batch:  0.4526253
flow loss 0.11580003
occ loss 0.124270596
time for this batch 0.5035843849182129
----------------------------------
i_batch:  64.0
the loss for this batch:  0.41458207
flow loss 0.09952807
occ loss 0.11918813
time for this batch 0.44874095916748047
----------------------------------
train loss for this epoch:  0.437693
time for this epoch 57.39416003227234
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  12
# batch:  96
i_batch:  0.0
the loss for this batch:  0.41939002
flow loss 0.11180738
occ loss 0.109392464
time for this batch 0.44814205169677734
----------------------------------
i_batch:  64.0
the loss for this batch:  0.48002583
flow loss 0.11422182
occ loss 0.13683493
time for this batch 0.4520280361175537
----------------------------------
train loss for this epoch:  0.432869
time for this epoch 57.5204963684082
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  13
# batch:  96
i_batch:  0.0
the loss for this batch:  0.38461804
flow loss 0.10329538
occ loss 0.108999565
time for this batch 0.45218849182128906
----------------------------------
i_batch:  64.0
the loss for this batch:  0.45933563
flow loss 0.11341537
occ loss 0.12214761
time for this batch 0.4942970275878906
----------------------------------
train loss for this epoch:  0.432196
time for this epoch 56.69869017601013
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  14
# batch:  96
i_batch:  0.0
the loss for this batch:  0.40595698
flow loss 0.09306713
occ loss 0.121114634
time for this batch 0.4209275245666504
----------------------------------
i_batch:  64.0
the loss for this batch:  0.3917174
flow loss 0.10045648
occ loss 0.098613
time for this batch 0.3889462947845459
----------------------------------
train loss for this epoch:  0.429507
time for this epoch 56.719913959503174
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  15
# batch:  96
i_batch:  0.0
the loss for this batch:  0.50059026
flow loss 0.13346553
occ loss 0.14473838
time for this batch 0.43703699111938477
----------------------------------
i_batch:  64.0
the loss for this batch:  0.41173792
flow loss 0.09790888
occ loss 0.1107974
time for this batch 0.5008053779602051
----------------------------------
train loss for this epoch:  0.428269
time for this epoch 58.0811653137207
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  16
# batch:  96
i_batch:  0.0
the loss for this batch:  0.4487177
flow loss 0.1217096
occ loss 0.13021946
time for this batch 0.47139453887939453
----------------------------------
i_batch:  64.0
the loss for this batch:  0.37587398
flow loss 0.09616328
occ loss 0.099565595
time for this batch 0.475430965423584
----------------------------------
train loss for this epoch:  0.425522
time for this epoch 57.16891145706177
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  17
# batch:  96
i_batch:  0.0
the loss for this batch:  0.39768016
flow loss 0.09809334
occ loss 0.115026265
time for this batch 0.38155078887939453
----------------------------------
i_batch:  64.0
the loss for this batch:  0.36976495
flow loss 0.10229407
occ loss 0.09544969
time for this batch 0.45798540115356445
----------------------------------
train loss for this epoch:  0.424612
time for this epoch 56.704729318618774
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  18
# batch:  96
i_batch:  0.0
the loss for this batch:  0.40041277
flow loss 0.100135855
occ loss 0.11654389
time for this batch 0.44441986083984375
----------------------------------
i_batch:  64.0
the loss for this batch:  0.3532248
flow loss 0.09398451
occ loss 0.09037387
time for this batch 0.4970591068267822
----------------------------------
train loss for this epoch:  0.424012
time for this epoch 57.04270911216736
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  19
# batch:  96
i_batch:  0.0
the loss for this batch:  0.40563178
flow loss 0.102993906
occ loss 0.10871507
time for this batch 0.4410436153411865
----------------------------------
i_batch:  64.0
the loss for this batch:  0.3790532
flow loss 0.10664109
occ loss 0.10246803
time for this batch 0.4953880310058594
----------------------------------
train loss for this epoch:  0.418903
time for this epoch 56.91793918609619
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  20
# batch:  96
i_batch:  0.0
the loss for this batch:  0.46053943
flow loss 0.11645563
occ loss 0.13180608
time for this batch 0.4135167598724365
----------------------------------
i_batch:  64.0
the loss for this batch:  0.46676612
flow loss 0.111099504
occ loss 0.14263982
time for this batch 0.49423861503601074
----------------------------------
train loss for this epoch:  0.419397
time for this epoch 57.122206926345825
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  21
# batch:  96
i_batch:  0.0
the loss for this batch:  0.44766033
flow loss 0.10425908
occ loss 0.12036569
time for this batch 0.47788000106811523
----------------------------------
i_batch:  64.0
the loss for this batch:  0.36561
flow loss 0.09193448
occ loss 0.094230026
time for this batch 0.4685022830963135
----------------------------------
train loss for this epoch:  0.417096
time for this epoch 57.15723776817322
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  22
# batch:  96
i_batch:  0.0
the loss for this batch:  0.3960529
flow loss 0.11642271
occ loss 0.097140916
time for this batch 0.4330790042877197
----------------------------------
i_batch:  64.0
the loss for this batch:  0.32736588
flow loss 0.082311854
occ loss 0.07472456
time for this batch 0.503929615020752
----------------------------------
train loss for this epoch:  0.417185
time for this epoch 58.30890250205994
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  23
# batch:  96
i_batch:  0.0
the loss for this batch:  0.29416412
flow loss 0.07743814
occ loss 0.07481889
time for this batch 0.456514835357666
----------------------------------
i_batch:  64.0
the loss for this batch:  0.44421843
flow loss 0.10066111
occ loss 0.13487573
time for this batch 0.5155096054077148
----------------------------------
train loss for this epoch:  0.416196
time for this epoch 58.81370425224304
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  24
# batch:  96
i_batch:  0.0
the loss for this batch:  0.3819264
flow loss 0.09463833
occ loss 0.10117159
time for this batch 0.4426436424255371
----------------------------------
i_batch:  64.0
the loss for this batch:  0.37747034
flow loss 0.10364902
occ loss 0.09680381
time for this batch 0.47710180282592773
----------------------------------
train loss for this epoch:  0.41354
time for this epoch 57.96373701095581
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  25
# batch:  96
i_batch:  0.0
the loss for this batch:  0.41961646
flow loss 0.10611949
occ loss 0.1056734
time for this batch 0.46695661544799805
----------------------------------
i_batch:  64.0
the loss for this batch:  0.35486275
flow loss 0.088980764
occ loss 0.10239354
time for this batch 0.5151512622833252
----------------------------------
train loss for this epoch:  0.41402
time for this epoch 60.22601008415222
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  26
# batch:  96
i_batch:  0.0
the loss for this batch:  0.46689218
flow loss 0.102170326
occ loss 0.13366473
time for this batch 0.4547107219696045
----------------------------------
i_batch:  64.0
the loss for this batch:  0.43256992
flow loss 0.10371715
occ loss 0.1287532
time for this batch 0.45818090438842773
----------------------------------
train loss for this epoch:  0.412368
time for this epoch 59.83414816856384
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  27
# batch:  96
i_batch:  0.0
the loss for this batch:  0.42484623
flow loss 0.11783598
occ loss 0.10917192
time for this batch 0.4482707977294922
----------------------------------
i_batch:  64.0
the loss for this batch:  0.37923363
flow loss 0.095129766
occ loss 0.10827326
time for this batch 0.4892551898956299
----------------------------------
train loss for this epoch:  0.411682
time for this epoch 58.671732664108276
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  28
# batch:  96
i_batch:  0.0
the loss for this batch:  0.409368
flow loss 0.09869444
occ loss 0.11390136
time for this batch 0.46148180961608887
----------------------------------
i_batch:  64.0
the loss for this batch:  0.42496687
flow loss 0.10907376
occ loss 0.10124661
time for this batch 0.519437313079834
----------------------------------
train loss for this epoch:  0.41005
time for this epoch 57.84042167663574
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  29
# batch:  96
i_batch:  0.0
the loss for this batch:  0.42307884
flow loss 0.10574784
occ loss 0.10804686
time for this batch 0.37985754013061523
----------------------------------
i_batch:  64.0
the loss for this batch:  0.371911
flow loss 0.09575754
occ loss 0.08779247
time for this batch 0.494673490524292
----------------------------------
train loss for this epoch:  0.41097
time for this epoch 56.76196360588074
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  30
# batch:  96
i_batch:  0.0
the loss for this batch:  0.34918404
flow loss 0.08914035
occ loss 0.09551398
time for this batch 0.4486820697784424
----------------------------------
i_batch:  64.0
the loss for this batch:  0.4072982
flow loss 0.090297215
occ loss 0.105377026
time for this batch 0.5140800476074219
----------------------------------
train loss for this epoch:  0.406818
time for this epoch 57.503493785858154
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  31
# batch:  96
i_batch:  0.0
the loss for this batch:  0.42254722
flow loss 0.11159819
occ loss 0.116882145
time for this batch 0.45699381828308105
----------------------------------
i_batch:  64.0
the loss for this batch:  0.38102853
flow loss 0.09489956
occ loss 0.09742557
time for this batch 0.5091052055358887
----------------------------------
train loss for this epoch:  0.40425
time for this epoch 58.36080598831177
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  32
# batch:  96
i_batch:  0.0
the loss for this batch:  0.44825068
flow loss 0.09335173
occ loss 0.124251954
time for this batch 0.47090792655944824
----------------------------------
i_batch:  64.0
the loss for this batch:  0.4397977
flow loss 0.11403872
occ loss 0.116409406
time for this batch 0.4982187747955322
----------------------------------
train loss for this epoch:  0.402007
time for this epoch 57.02274537086487
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  33
# batch:  96
i_batch:  0.0
the loss for this batch:  0.3771222
flow loss 0.09125594
occ loss 0.10766024
time for this batch 0.44614505767822266
----------------------------------
i_batch:  64.0
the loss for this batch:  0.4594447
flow loss 0.0996757
occ loss 0.1369935
time for this batch 0.4827277660369873
----------------------------------
train loss for this epoch:  0.398187
time for this epoch 56.94504451751709
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  34
# batch:  96
i_batch:  0.0
the loss for this batch:  0.4342507
flow loss 0.12288104
occ loss 0.1304513
time for this batch 0.43971776962280273
----------------------------------
i_batch:  64.0
the loss for this batch:  0.4104241
flow loss 0.091517225
occ loss 0.10836525
time for this batch 0.5395078659057617
----------------------------------
train loss for this epoch:  0.393426
time for this epoch 57.39520287513733
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  35
# batch:  96
i_batch:  0.0
the loss for this batch:  0.46442944
flow loss 0.1164882
occ loss 0.145547
time for this batch 0.45943784713745117
----------------------------------
i_batch:  64.0
the loss for this batch:  0.37692225
flow loss 0.08171541
occ loss 0.09934143
time for this batch 0.48022985458374023
----------------------------------
train loss for this epoch:  0.38721
time for this epoch 56.67690563201904
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  36
# batch:  96
i_batch:  0.0
the loss for this batch:  0.3427189
flow loss 0.09798035
occ loss 0.0952055
time for this batch 0.4549384117126465
----------------------------------
i_batch:  64.0
the loss for this batch:  0.41036835
flow loss 0.11359605
occ loss 0.113046296
time for this batch 0.4846916198730469
----------------------------------
train loss for this epoch:  0.38233
time for this epoch 57.439910888671875
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  37
# batch:  96
i_batch:  0.0
the loss for this batch:  0.38860953
flow loss 0.09790813
occ loss 0.099632695
time for this batch 0.4678535461425781
----------------------------------
i_batch:  64.0
the loss for this batch:  0.35990876
flow loss 0.08968501
occ loss 0.110446446
time for this batch 0.48055028915405273
----------------------------------
train loss for this epoch:  0.368804
time for this epoch 55.91486930847168
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  38
# batch:  96
i_batch:  0.0
the loss for this batch:  0.33679757
flow loss 0.0888739
occ loss 0.09255929
time for this batch 0.4342324733734131
----------------------------------
i_batch:  64.0
the loss for this batch:  0.36533523
flow loss 0.09623773
occ loss 0.111288585
time for this batch 0.506514310836792
----------------------------------
train loss for this epoch:  0.356988
time for this epoch 57.38023543357849
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  39
# batch:  96
i_batch:  0.0
the loss for this batch:  0.30428326
flow loss 0.081718005
occ loss 0.07787413
time for this batch 0.43451476097106934
----------------------------------
i_batch:  64.0
the loss for this batch:  0.35058337
flow loss 0.103774294
occ loss 0.101473264
time for this batch 0.48511266708374023
----------------------------------
train loss for this epoch:  0.340387
time for this epoch 56.17021298408508
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  40
# batch:  96
i_batch:  0.0
the loss for this batch:  0.29709947
flow loss 0.06864549
occ loss 0.08598895
time for this batch 0.45409631729125977
----------------------------------
i_batch:  64.0
the loss for this batch:  0.330058
flow loss 0.07955239
occ loss 0.11494616
time for this batch 0.45813751220703125
----------------------------------
train loss for this epoch:  0.323378
time for this epoch 54.42401075363159
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  41
# batch:  96
i_batch:  0.0
the loss for this batch:  0.30967814
flow loss 0.08537856
occ loss 0.10548884
time for this batch 0.4891228675842285
----------------------------------
i_batch:  64.0
the loss for this batch:  0.31138682
flow loss 0.08362573
occ loss 0.110224776
time for this batch 0.46434855461120605
----------------------------------
train loss for this epoch:  0.299639
time for this epoch 57.17849898338318
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  42
# batch:  96
i_batch:  0.0
the loss for this batch:  0.2293435
flow loss 0.06885545
occ loss 0.090316094
time for this batch 0.4408726692199707
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2549287
flow loss 0.07678229
occ loss 0.09380289
time for this batch 0.4701120853424072
----------------------------------
train loss for this epoch:  0.276589
time for this epoch 56.65172004699707
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  43
# batch:  96
i_batch:  0.0
the loss for this batch:  0.26807967
flow loss 0.08296997
occ loss 0.10264808
time for this batch 0.4392688274383545
----------------------------------
i_batch:  64.0
the loss for this batch:  0.27827975
flow loss 0.085978046
occ loss 0.114176005
time for this batch 0.37773609161376953
----------------------------------
train loss for this epoch:  0.250583
time for this epoch 55.67051911354065
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  44
# batch:  96
i_batch:  0.0
the loss for this batch:  0.28988692
flow loss 0.0847115
occ loss 0.1292841
time for this batch 0.46034717559814453
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24618478
flow loss 0.07984559
occ loss 0.11801133
time for this batch 0.4771285057067871
----------------------------------
train loss for this epoch:  0.22595
time for this epoch 57.171910524368286
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  45
# batch:  96
i_batch:  0.0
the loss for this batch:  0.19792622
flow loss 0.07039622
occ loss 0.08873201
time for this batch 0.43613314628601074
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15931503
flow loss 0.061350692
occ loss 0.0752038
time for this batch 0.46597790718078613
----------------------------------
train loss for this epoch:  0.205864
time for this epoch 57.172303199768066
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  46
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17574093
flow loss 0.06466974
occ loss 0.093312494
time for this batch 0.3650803565979004
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18421389
flow loss 0.06959682
occ loss 0.100628816
time for this batch 0.48705387115478516
----------------------------------
train loss for this epoch:  0.191208
time for this epoch 56.51705741882324
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  47
# batch:  96
i_batch:  0.0
the loss for this batch:  0.19786349
flow loss 0.0684956
occ loss 0.11329075
time for this batch 0.44215822219848633
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21038693
flow loss 0.0711813
occ loss 0.13042372
time for this batch 0.4729795455932617
----------------------------------
train loss for this epoch:  0.182046
time for this epoch 56.669657468795776
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  48
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16394475
flow loss 0.059930235
occ loss 0.096572615
time for this batch 0.47124266624450684
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19114932
flow loss 0.073599055
occ loss 0.11209632
time for this batch 0.49654054641723633
----------------------------------
train loss for this epoch:  0.174602
time for this epoch 57.09907126426697
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  49
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17088449
flow loss 0.06740076
occ loss 0.09892282
time for this batch 0.41875648498535156
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22656266
flow loss 0.077932075
occ loss 0.1461927
time for this batch 0.470184326171875
----------------------------------
train loss for this epoch:  0.171534
time for this epoch 56.55620312690735
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  50
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1428076
flow loss 0.060366612
occ loss 0.08066277
time for this batch 0.45896100997924805
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2833389
flow loss 0.123044565
occ loss 0.15755492
time for this batch 0.45763111114501953
----------------------------------
train loss for this epoch:  0.208377
time for this epoch 55.74706530570984
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  51
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17089728
flow loss 0.075031966
occ loss 0.094032966
time for this batch 0.4688386917114258
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16287541
flow loss 0.06979747
occ loss 0.09188579
time for this batch 0.4555394649505615
----------------------------------
train loss for this epoch:  0.181095
time for this epoch 55.33394145965576
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  52
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14237644
flow loss 0.062444765
occ loss 0.078261144
time for this batch 0.42099666595458984
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18765439
flow loss 0.07193016
occ loss 0.11448983
time for this batch 0.47904038429260254
----------------------------------
train loss for this epoch:  0.173269
time for this epoch 57.21428155899048
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  53
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14639401
flow loss 0.06078006
occ loss 0.08484088
time for this batch 0.4494805335998535
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16742866
flow loss 0.062452033
occ loss 0.10344128
time for this batch 0.5286417007446289
----------------------------------
train loss for this epoch:  0.172894
time for this epoch 57.46303200721741
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  54
# batch:  96
i_batch:  0.0
the loss for this batch:  0.20473146
flow loss 0.0717112
occ loss 0.13184822
time for this batch 0.44265317916870117
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18087377
flow loss 0.068759404
occ loss 0.11120617
time for this batch 0.5041193962097168
----------------------------------
train loss for this epoch:  0.169849
time for this epoch 57.953922510147095
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  55
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16900903
flow loss 0.06625625
occ loss 0.10207065
time for this batch 0.3349134922027588
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19790557
flow loss 0.07253476
occ loss 0.12452545
time for this batch 0.480283260345459
----------------------------------
train loss for this epoch:  0.168769
time for this epoch 57.62197780609131
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  56
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16645174
flow loss 0.063629635
occ loss 0.10210735
time for this batch 0.4529547691345215
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1650283
flow loss 0.06856293
occ loss 0.0951004
time for this batch 0.45896339416503906
----------------------------------
train loss for this epoch:  0.168171
time for this epoch 56.59158539772034
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  57
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16866548
flow loss 0.06445403
occ loss 0.10328936
time for this batch 0.45575690269470215
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16972452
flow loss 0.06953925
occ loss 0.0990324
time for this batch 0.49000048637390137
----------------------------------
train loss for this epoch:  0.167159
time for this epoch 56.94491100311279
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  58
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14326268
flow loss 0.057386473
occ loss 0.08495182
time for this batch 0.5034477710723877
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16092189
flow loss 0.06238699
occ loss 0.09764191
time for this batch 0.5142927169799805
----------------------------------
train loss for this epoch:  0.16758
time for this epoch 59.13537669181824
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  59
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17510428
flow loss 0.06604984
occ loss 0.10813903
time for this batch 0.48776984214782715
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15720923
flow loss 0.06321975
occ loss 0.09241863
time for this batch 0.49738550186157227
----------------------------------
train loss for this epoch:  0.167178
time for this epoch 57.84379601478577
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  60
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14930384
flow loss 0.059290987
occ loss 0.089361094
time for this batch 0.46717262268066406
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16792078
flow loss 0.06330743
occ loss 0.10391903
time for this batch 0.5104665756225586
----------------------------------
train loss for this epoch:  0.165691
time for this epoch 58.642645835876465
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  61
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15876001
flow loss 0.06035288
occ loss 0.09768496
time for this batch 0.42600393295288086
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20849803
flow loss 0.071591035
occ loss 0.13569331
time for this batch 0.5122506618499756
----------------------------------
train loss for this epoch:  0.166259
time for this epoch 58.219282150268555
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  62
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16295478
flow loss 0.06106617
occ loss 0.10129363
time for this batch 0.4499802589416504
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16171934
flow loss 0.0625629
occ loss 0.09839725
time for this batch 0.41702818870544434
----------------------------------
train loss for this epoch:  0.165842
time for this epoch 58.26057410240173
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  63
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16997434
flow loss 0.0623505
occ loss 0.106937416
time for this batch 0.4203989505767822
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18153788
flow loss 0.065317035
occ loss 0.11526609
time for this batch 0.5060584545135498
----------------------------------
train loss for this epoch:  0.165406
time for this epoch 56.74946308135986
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  64
# batch:  96
i_batch:  0.0
the loss for this batch:  0.184158
flow loss 0.063978314
occ loss 0.11917093
time for this batch 0.46250271797180176
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14562623
flow loss 0.059979
occ loss 0.084549926
time for this batch 0.5010561943054199
----------------------------------
train loss for this epoch:  0.164737
time for this epoch 57.00438165664673
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  65
# batch:  96
i_batch:  0.0
the loss for this batch:  0.12589265
flow loss 0.05377545
occ loss 0.07122317
time for this batch 0.4634981155395508
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14692923
flow loss 0.060727924
occ loss 0.08504384
time for this batch 0.47271728515625
----------------------------------
train loss for this epoch:  0.165114
time for this epoch 57.064369678497314
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  66
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17846623
flow loss 0.062074244
occ loss 0.11580289
time for this batch 0.45317959785461426
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18721661
flow loss 0.06966216
occ loss 0.11668047
time for this batch 0.42349839210510254
----------------------------------
train loss for this epoch:  0.164326
time for this epoch 57.09487462043762
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  67
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15935868
flow loss 0.062124245
occ loss 0.09650708
time for this batch 0.4172835350036621
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17151137
flow loss 0.066757984
occ loss 0.10382885
time for this batch 0.35741090774536133
----------------------------------
train loss for this epoch:  0.164416
time for this epoch 56.38816690444946
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  68
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17448127
flow loss 0.06358678
occ loss 0.10960779
time for this batch 0.4515829086303711
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16099973
flow loss 0.0632524
occ loss 0.09660716
time for this batch 0.4829061031341553
----------------------------------
train loss for this epoch:  0.163123
time for this epoch 57.00096917152405
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  69
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16798338
flow loss 0.063675605
occ loss 0.10315779
time for this batch 0.4755103588104248
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18213612
flow loss 0.06802641
occ loss 0.11329841
time for this batch 0.511145830154419
----------------------------------
train loss for this epoch:  0.163997
time for this epoch 57.557461738586426
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  70
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16620997
flow loss 0.060995094
occ loss 0.10417847
time for this batch 0.35910534858703613
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20417729
flow loss 0.06878644
occ loss 0.13472597
time for this batch 0.48972034454345703
----------------------------------
train loss for this epoch:  0.165252
time for this epoch 56.93786334991455
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  71
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18956366
flow loss 0.06561856
occ loss 0.12293041
time for this batch 0.4202275276184082
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14005043
flow loss 0.05528647
occ loss 0.08383574
time for this batch 0.5098414421081543
----------------------------------
train loss for this epoch:  0.164065
time for this epoch 59.500913858413696
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  72
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1651399
flow loss 0.06292015
occ loss 0.101597354
time for this batch 0.46848058700561523
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16473489
flow loss 0.06019748
occ loss 0.103829235
time for this batch 0.4980447292327881
----------------------------------
train loss for this epoch:  0.162236
time for this epoch 58.979766845703125
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  73
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1551562
flow loss 0.060430728
occ loss 0.09381386
time for this batch 0.4366271495819092
----------------------------------
i_batch:  64.0
the loss for this batch:  0.207159
flow loss 0.06718065
occ loss 0.13920923
time for this batch 0.4886767864227295
----------------------------------
train loss for this epoch:  0.162877
time for this epoch 56.99981331825256
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  74
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15198943
flow loss 0.057112116
occ loss 0.093876354
time for this batch 0.3632652759552002
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14739034
flow loss 0.05534512
occ loss 0.091243535
time for this batch 0.49224185943603516
----------------------------------
train loss for this epoch:  0.161518
time for this epoch 55.73976111412048
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  75
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16856128
flow loss 0.06406727
occ loss 0.10374503
time for this batch 0.45081400871276855
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1517569
flow loss 0.06090799
occ loss 0.090283126
time for this batch 0.4971909523010254
----------------------------------
train loss for this epoch:  0.162308
time for this epoch 58.051241874694824
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  76
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14166099
flow loss 0.06083123
occ loss 0.080026254
time for this batch 0.4494476318359375
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1677501
flow loss 0.0614664
occ loss 0.10518044
time for this batch 0.4883701801300049
----------------------------------
train loss for this epoch:  0.16361
time for this epoch 57.99287986755371
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  77
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15977444
flow loss 0.062656164
occ loss 0.09638435
time for this batch 0.45237016677856445
----------------------------------
i_batch:  64.0
the loss for this batch:  0.092287436
flow loss 0.044855747
occ loss 0.046836227
time for this batch 0.4672055244445801
----------------------------------
train loss for this epoch:  0.161159
time for this epoch 57.1151123046875
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  78
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17100927
flow loss 0.06293436
occ loss 0.1071711
time for this batch 0.46407389640808105
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2179169
flow loss 0.09177256
occ loss 0.12501116
time for this batch 0.482928991317749
----------------------------------
train loss for this epoch:  0.168809
time for this epoch 57.661136627197266
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  79
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16424689
flow loss 0.06634416
occ loss 0.09712871
time for this batch 0.414722204208374
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1828493
flow loss 0.066890806
occ loss 0.11492169
time for this batch 0.4996683597564697
----------------------------------
train loss for this epoch:  0.163822
time for this epoch 58.307910680770874
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  80
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14813781
flow loss 0.056760103
occ loss 0.09057009
time for this batch 0.4371829032897949
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14118828
flow loss 0.05832357
occ loss 0.08185099
time for this batch 0.48531460762023926
----------------------------------
train loss for this epoch:  0.160934
time for this epoch 57.48443150520325
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  81
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15444846
flow loss 0.058809947
occ loss 0.09470076
time for this batch 0.442751407623291
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13921529
flow loss 0.057484493
occ loss 0.0806709
time for this batch 0.4849681854248047
----------------------------------
train loss for this epoch:  0.160389
time for this epoch 56.83347702026367
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  82
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15157185
flow loss 0.059637077
occ loss 0.09069639
time for this batch 0.3979012966156006
----------------------------------
i_batch:  64.0
the loss for this batch:  0.12656179
flow loss 0.054665733
occ loss 0.07066437
time for this batch 0.4450836181640625
----------------------------------
train loss for this epoch:  0.160942
time for this epoch 56.5713529586792
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  83
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16451642
flow loss 0.061535735
occ loss 0.10234887
time for this batch 0.44217371940612793
----------------------------------
i_batch:  64.0
the loss for this batch:  0.10741943
flow loss 0.047859915
occ loss 0.059002567
time for this batch 0.4642512798309326
----------------------------------
train loss for this epoch:  0.161651
time for this epoch 56.25226044654846
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  84
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14487855
flow loss 0.0543521
occ loss 0.089553796
time for this batch 0.45827651023864746
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18278173
flow loss 0.06400986
occ loss 0.11776061
time for this batch 0.47780656814575195
----------------------------------
train loss for this epoch:  0.159069
time for this epoch 56.585936307907104
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  85
# batch:  96
i_batch:  0.0
the loss for this batch:  0.10593353
flow loss 0.049458157
occ loss 0.055602342
time for this batch 0.430591344833374
----------------------------------
i_batch:  64.0
the loss for this batch:  0.124664426
flow loss 0.05237726
occ loss 0.07138876
time for this batch 0.5040795803070068
----------------------------------
train loss for this epoch:  0.15894
time for this epoch 57.932048320770264
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  86
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14608681
flow loss 0.05721428
occ loss 0.08817356
time for this batch 0.34967875480651855
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19725312
flow loss 0.066106945
occ loss 0.13049625
time for this batch 0.5066368579864502
----------------------------------
train loss for this epoch:  0.160667
time for this epoch 57.198086738586426
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  87
# batch:  96
i_batch:  0.0
the loss for this batch:  0.12624809
flow loss 0.05434687
occ loss 0.07093294
time for this batch 0.469038724899292
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15927063
flow loss 0.05976685
occ loss 0.09852124
time for this batch 0.47913098335266113
----------------------------------
train loss for this epoch:  0.158973
time for this epoch 57.24359154701233
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  88
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13549732
flow loss 0.055640582
occ loss 0.0788288
time for this batch 0.4405100345611572
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14300615
flow loss 0.0586505
occ loss 0.08380779
time for this batch 0.3932919502258301
----------------------------------
train loss for this epoch:  0.159793
time for this epoch 56.046892166137695
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  89
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13263316
flow loss 0.052975234
occ loss 0.07854462
time for this batch 0.44124794006347656
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1867388
flow loss 0.06747816
occ loss 0.11828478
time for this batch 0.5048551559448242
----------------------------------
train loss for this epoch:  0.158638
time for this epoch 57.45936417579651
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  90
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15023059
flow loss 0.056648728
occ loss 0.09276998
time for this batch 0.45366525650024414
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16531779
flow loss 0.05965275
occ loss 0.10492268
time for this batch 0.44420480728149414
----------------------------------
train loss for this epoch:  0.159817
time for this epoch 57.07018709182739
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  91
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18540736
flow loss 0.063485645
occ loss 0.12131392
time for this batch 0.47290992736816406
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13484168
flow loss 0.05363951
occ loss 0.07993434
time for this batch 0.502465009689331
----------------------------------
train loss for this epoch:  0.158088
time for this epoch 56.84256863594055
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  92
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16546276
flow loss 0.062683776
occ loss 0.101810515
time for this batch 0.4441978931427002
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15678002
flow loss 0.057858046
occ loss 0.098220915
time for this batch 0.4653968811035156
----------------------------------
train loss for this epoch:  0.159379
time for this epoch 56.27992343902588
No_decrease:  8
----------------an epoch starts-------------------
i_epoch:  93
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17639624
flow loss 0.06340785
occ loss 0.11230188
time for this batch 0.36490368843078613
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15846021
flow loss 0.058906607
occ loss 0.098815754
time for this batch 0.5048661231994629
----------------------------------
train loss for this epoch:  0.158723
time for this epoch 56.312355518341064
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  94
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15859027
flow loss 0.054821394
occ loss 0.10313759
time for this batch 0.43781399726867676
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18301748
flow loss 0.067438476
occ loss 0.114566855
time for this batch 0.5034055709838867
----------------------------------
train loss for this epoch:  0.157611
time for this epoch 56.5135555267334
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  95
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15965842
flow loss 0.061167754
occ loss 0.09772313
time for this batch 0.4513366222381592
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14544384
flow loss 0.058060247
occ loss 0.08635304
time for this batch 0.5201408863067627
----------------------------------
train loss for this epoch:  0.158023
time for this epoch 57.919347286224365
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  96
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16567686
flow loss 0.062200494
occ loss 0.102673545
time for this batch 0.43378376960754395
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1687215
flow loss 0.06049126
occ loss 0.107740685
time for this batch 0.43665575981140137
----------------------------------
train loss for this epoch:  0.158017
time for this epoch 57.63165354728699
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  97
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1518107
flow loss 0.05500412
occ loss 0.09583255
time for this batch 0.4722757339477539
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18757036
flow loss 0.0652224
occ loss 0.121607244
time for this batch 0.3050501346588135
----------------------------------
train loss for this epoch:  0.157322
time for this epoch 57.86514949798584
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  98
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14076038
flow loss 0.057656985
occ loss 0.08228671
time for this batch 0.4721841812133789
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15276384
flow loss 0.059886385
occ loss 0.09195231
time for this batch 0.4973745346069336
----------------------------------
train loss for this epoch:  0.157356
time for this epoch 56.73479509353638
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  99
# batch:  96
i_batch:  0.0
the loss for this batch:  0.19427268
flow loss 0.06582773
occ loss 0.12770353
time for this batch 0.4646031856536865
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14351463
flow loss 0.05867084
occ loss 0.0835708
time for this batch 0.4566919803619385
----------------------------------
train loss for this epoch:  0.15735
time for this epoch 57.091628074645996
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  100
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14916697
flow loss 0.059164964
occ loss 0.08928162
time for this batch 0.44809699058532715
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15047716
flow loss 0.05956196
occ loss 0.090429395
time for this batch 0.4491119384765625
----------------------------------
train loss for this epoch:  0.156982
time for this epoch 58.54954552650452
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  101
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13235846
flow loss 0.053304307
occ loss 0.07808786
time for this batch 0.46100521087646484
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14960107
flow loss 0.054277085
occ loss 0.09466111
time for this batch 0.45705246925354004
----------------------------------
train loss for this epoch:  0.156617
time for this epoch 56.588552951812744
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  102
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13999432
flow loss 0.056790978
occ loss 0.08211697
time for this batch 0.5053050518035889
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16723812
flow loss 0.060332436
occ loss 0.106049225
time for this batch 0.48012351989746094
----------------------------------
train loss for this epoch:  0.156008
time for this epoch 57.66069173812866
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  103
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16420594
flow loss 0.058806907
occ loss 0.104724884
time for this batch 0.4810056686401367
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14653224
flow loss 0.05666993
occ loss 0.08887997
time for this batch 0.4731252193450928
----------------------------------
train loss for this epoch:  0.156305
time for this epoch 56.271113872528076
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  104
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16534367
flow loss 0.060000654
occ loss 0.10454603
time for this batch 0.4500772953033447
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13078958
flow loss 0.055756483
occ loss 0.07408799
time for this batch 0.4649341106414795
----------------------------------
train loss for this epoch:  0.157884
time for this epoch 56.26455640792847
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  105
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18537314
flow loss 0.07287472
occ loss 0.11114914
time for this batch 0.4620978832244873
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17300302
flow loss 0.06076697
occ loss 0.11144735
time for this batch 0.48777008056640625
----------------------------------
train loss for this epoch:  0.157914
time for this epoch 56.550880432128906
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  106
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1861492
flow loss 0.064636566
occ loss 0.12089533
time for this batch 0.43120908737182617
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13659805
flow loss 0.053865224
occ loss 0.081748426
time for this batch 0.5223298072814941
----------------------------------
train loss for this epoch:  0.155441
time for this epoch 59.562175273895264
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  107
# batch:  96
i_batch:  0.0
the loss for this batch:  0.11833981
flow loss 0.051789228
occ loss 0.06593726
time for this batch 0.4761042594909668
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17321506
flow loss 0.061268013
occ loss 0.11087955
time for this batch 0.48718762397766113
----------------------------------
train loss for this epoch:  0.157239
time for this epoch 58.64956545829773
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  108
# batch:  96
i_batch:  0.0
the loss for this batch:  0.12507223
flow loss 0.048974585
occ loss 0.07538375
time for this batch 0.5051686763763428
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16668752
flow loss 0.058186397
occ loss 0.10770346
time for this batch 0.5522987842559814
----------------------------------
train loss for this epoch:  0.155205
time for this epoch 58.41923499107361
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  109
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18447877
flow loss 0.06396446
occ loss 0.11945831
time for this batch 0.44652557373046875
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14049836
flow loss 0.054446436
occ loss 0.08503991
time for this batch 0.3970625400543213
----------------------------------
train loss for this epoch:  0.156353
time for this epoch 57.54139423370361
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  110
# batch:  96
i_batch:  0.0
the loss for this batch:  0.19483674
flow loss 0.0669508
occ loss 0.12665315
time for this batch 0.44986391067504883
----------------------------------
i_batch:  64.0
the loss for this batch:  0.11170816
flow loss 0.044999026
occ loss 0.0657573
time for this batch 0.48705339431762695
----------------------------------
train loss for this epoch:  0.158306
time for this epoch 58.80824303627014
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  111
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16938204
flow loss 0.05848166
occ loss 0.110217355
time for this batch 0.4786968231201172
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16671339
flow loss 0.058077022
occ loss 0.107549034
time for this batch 0.4833977222442627
----------------------------------
train loss for this epoch:  0.15468
time for this epoch 57.45481324195862
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  112
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18923552
flow loss 0.06556541
occ loss 0.12292136
time for this batch 0.41150665283203125
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16858889
flow loss 0.061505552
occ loss 0.10637223
time for this batch 0.4665720462799072
----------------------------------
train loss for this epoch:  0.153924
time for this epoch 57.80531930923462
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  113
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1672737
flow loss 0.059145715
occ loss 0.10719332
time for this batch 0.47563672065734863
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14798276
flow loss 0.056332294
occ loss 0.09100913
time for this batch 0.41141295433044434
----------------------------------
train loss for this epoch:  0.155564
time for this epoch 48.02052187919617
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  114
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13051385
flow loss 0.05159614
occ loss 0.07752693
time for this batch 0.3946261405944824
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13498458
flow loss 0.054472137
occ loss 0.079710595
time for this batch 0.44052839279174805
----------------------------------
train loss for this epoch:  0.154688
time for this epoch 47.87200570106506
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  115
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16468288
flow loss 0.05823496
occ loss 0.10580814
time for this batch 0.39049363136291504
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18791206
flow loss 0.06419406
occ loss 0.12313473
time for this batch 0.41671061515808105
----------------------------------
train loss for this epoch:  0.155422
time for this epoch 48.83049559593201
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  116
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15067792
flow loss 0.05792886
occ loss 0.09161641
time for this batch 0.37308216094970703
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17988585
flow loss 0.06459574
occ loss 0.11446693
time for this batch 0.40114688873291016
----------------------------------
train loss for this epoch:  0.156041
time for this epoch 46.91168189048767
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  117
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13867481
flow loss 0.057609774
occ loss 0.08007535
time for this batch 0.3638455867767334
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16317712
flow loss 0.05741386
occ loss 0.1048155
time for this batch 0.42548418045043945
----------------------------------
train loss for this epoch:  0.155502
time for this epoch 50.67264676094055
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  118
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1370633
flow loss 0.051552564
occ loss 0.08465434
time for this batch 0.3566768169403076
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19451173
flow loss 0.063642986
occ loss 0.13010818
time for this batch 0.4012455940246582
----------------------------------
train loss for this epoch:  0.154724
time for this epoch 47.07816457748413
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  119
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15026748
flow loss 0.05623838
occ loss 0.093110345
time for this batch 0.366635799407959
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15122256
flow loss 0.05905317
occ loss 0.09123556
time for this batch 0.39893579483032227
----------------------------------
train loss for this epoch:  0.154577
time for this epoch 48.368045806884766
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  120
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13345711
flow loss 0.05163934
occ loss 0.081160046
time for this batch 0.36498022079467773
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18433917
flow loss 0.0640707
occ loss 0.119453646
time for this batch 0.40045666694641113
----------------------------------
train loss for this epoch:  0.153547
time for this epoch 46.65022826194763
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  121
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17450094
flow loss 0.05972431
occ loss 0.11416137
time for this batch 0.4234917163848877
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17500958
flow loss 0.0629132
occ loss 0.1112012
time for this batch 0.41067075729370117
----------------------------------
train loss for this epoch:  0.154484
time for this epoch 50.49652624130249
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  122
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15451689
flow loss 0.05552986
occ loss 0.098250516
time for this batch 0.3132059574127197
----------------------------------
i_batch:  64.0
the loss for this batch:  0.11892879
flow loss 0.0514266
occ loss 0.06645061
time for this batch 0.3972194194793701
----------------------------------
train loss for this epoch:  0.153562
time for this epoch 48.31332278251648
No_decrease:  8
----------------an epoch starts-------------------
i_epoch:  123
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15302289
flow loss 0.055392712
occ loss 0.09673949
time for this batch 0.40219902992248535
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14260595
flow loss 0.05883269
occ loss 0.083043784
time for this batch 0.43140339851379395
----------------------------------
train loss for this epoch:  0.154649
time for this epoch 47.95385146141052
No_decrease:  9
----------------an epoch starts-------------------
i_epoch:  124
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15461752
flow loss 0.05771293
occ loss 0.09592319
time for this batch 0.3458707332611084
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14006919
flow loss 0.05545568
occ loss 0.083789505
time for this batch 0.3985593318939209
----------------------------------
train loss for this epoch:  0.15497
time for this epoch 49.04661011695862
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  125
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15748465
flow loss 0.057813253
occ loss 0.09860149
time for this batch 0.3582022190093994
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15221055
flow loss 0.055622946
occ loss 0.09607126
time for this batch 0.4089348316192627
----------------------------------
train loss for this epoch:  0.152565
time for this epoch 46.71931433677673
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  126
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18489592
flow loss 0.06211215
occ loss 0.12202269
time for this batch 0.35335826873779297
----------------------------------
i_batch:  64.0
the loss for this batch:  0.122199915
flow loss 0.051151782
occ loss 0.070308276
time for this batch 0.43031764030456543
----------------------------------
train loss for this epoch:  0.152527
time for this epoch 49.8864963054657
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  127
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13992766
flow loss 0.05373134
occ loss 0.085244864
time for this batch 0.38612794876098633
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1658553
flow loss 0.06242656
occ loss 0.10252008
time for this batch 0.4279508590698242
----------------------------------
train loss for this epoch:  0.152255
time for this epoch 45.70396280288696
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  128
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17398769
flow loss 0.060856543
occ loss 0.11241765
time for this batch 0.35540246963500977
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14206694
flow loss 0.054703098
occ loss 0.08631265
time for this batch 0.3490581512451172
----------------------------------
train loss for this epoch:  0.153187
time for this epoch 45.965306758880615
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  129
# batch:  96
i_batch:  0.0
the loss for this batch:  0.12457954
flow loss 0.04966625
occ loss 0.074011214
time for this batch 0.34937119483947754
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13560843
flow loss 0.05084146
occ loss 0.08419414
time for this batch 0.4161250591278076
----------------------------------
train loss for this epoch:  0.152419
time for this epoch 44.744980335235596
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  130
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15702778
flow loss 0.06003109
occ loss 0.096167475
time for this batch 0.36261868476867676
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20105645
flow loss 0.06595293
occ loss 0.13424434
time for this batch 0.4024038314819336
----------------------------------
train loss for this epoch:  0.154265
time for this epoch 46.581308364868164
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  131
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1572772
flow loss 0.053096224
occ loss 0.10362933
time for this batch 0.3386714458465576
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13900036
flow loss 0.052440263
occ loss 0.08561021
time for this batch 0.4156210422515869
----------------------------------
train loss for this epoch:  0.152297
time for this epoch 45.91918873786926
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  132
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14202343
flow loss 0.05491642
occ loss 0.086413376
time for this batch 0.3616452217102051
----------------------------------
i_batch:  64.0
the loss for this batch:  0.12497675
flow loss 0.052797098
occ loss 0.07155668
time for this batch 0.42595505714416504
----------------------------------
train loss for this epoch:  0.153291
time for this epoch 47.5648033618927
No_decrease:  8
----------------an epoch starts-------------------
i_epoch:  133
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14338739
flow loss 0.056214828
occ loss 0.08633478
time for this batch 0.38284969329833984
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16993776
flow loss 0.058323853
occ loss 0.11054952
time for this batch 0.4487266540527344
----------------------------------
train loss for this epoch:  0.152114
time for this epoch 49.170897006988525
No_decrease:  9
----------------an epoch starts-------------------
i_epoch:  134
# batch:  96
i_batch:  0.0
the loss for this batch:  0.11836463
flow loss 0.048329353
occ loss 0.069274575
time for this batch 0.40041589736938477
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15290795
flow loss 0.059467107
occ loss 0.092814535
time for this batch 0.4528694152832031
----------------------------------
train loss for this epoch:  0.150985
time for this epoch 48.54303431510925
No_decrease:  10
----------------an epoch starts-------------------
i_epoch:  135
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13076098
flow loss 0.052908782
occ loss 0.07704843
time for this batch 0.3589451313018799
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14984743
flow loss 0.054136496
occ loss 0.09492986
time for this batch 0.40830445289611816
----------------------------------
train loss for this epoch:  0.151186
time for this epoch 47.210108041763306
No_decrease:  11
----------------an epoch starts-------------------
i_epoch:  136
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14574954
flow loss 0.055200055
occ loss 0.08991081
time for this batch 0.36376523971557617
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14664263
flow loss 0.05309029
occ loss 0.09279165
time for this batch 0.45195460319519043
----------------------------------
train loss for this epoch:  0.152535
time for this epoch 48.105342388153076
No_decrease:  12
----------------an epoch starts-------------------
i_epoch:  137
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1635921
flow loss 0.060272995
occ loss 0.10248759
time for this batch 0.36197519302368164
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16647352
flow loss 0.06292703
occ loss 0.10239473
time for this batch 0.4113931655883789
----------------------------------
train loss for this epoch:  0.151819
time for this epoch 47.677505016326904
No_decrease:  13
----------------an epoch starts-------------------
i_epoch:  138
# batch:  96
i_batch:  0.0
the loss for this batch:  0.12628213
flow loss 0.052067984
occ loss 0.073415056
time for this batch 0.3546481132507324
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13715728
flow loss 0.05555606
occ loss 0.08074377
time for this batch 0.3591015338897705
----------------------------------
train loss for this epoch:  0.151063
time for this epoch 47.3203341960907
No_decrease:  14
----------------an epoch starts-------------------
i_epoch:  139
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17992924
flow loss 0.06535856
occ loss 0.11373387
time for this batch 0.336489200592041
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15050007
flow loss 0.05669841
occ loss 0.09291061
time for this batch 0.40776491165161133
----------------------------------
train loss for this epoch:  0.151929
time for this epoch 47.597182512283325
No_decrease:  15
----------------an epoch starts-------------------
i_epoch:  140
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13952245
flow loss 0.053656038
occ loss 0.084810324
time for this batch 0.3903827667236328
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13923351
flow loss 0.050905883
occ loss 0.0874183
time for this batch 0.4213593006134033
----------------------------------
train loss for this epoch:  0.151608
time for this epoch 47.09646940231323
No_decrease:  16
----------------an epoch starts-------------------
i_epoch:  141
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18668158
flow loss 0.063786834
occ loss 0.12200097
time for this batch 0.35979223251342773
----------------------------------
i_batch:  64.0
the loss for this batch:  0.10565785
flow loss 0.04816964
occ loss 0.056404248
time for this batch 0.40865182876586914
----------------------------------
train loss for this epoch:  0.156859
time for this epoch 48.576007604599
No_decrease:  17
----------------an epoch starts-------------------
i_epoch:  142
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14935142
flow loss 0.05667464
occ loss 0.09209566
time for this batch 0.33305788040161133
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19619286
flow loss 0.065082006
occ loss 0.13027397
time for this batch 0.41574692726135254
----------------------------------
train loss for this epoch:  0.150647
time for this epoch 45.6798152923584
No_decrease:  18
----------------an epoch starts-------------------
i_epoch:  143
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14176215
flow loss 0.057895783
occ loss 0.08273258
time for this batch 0.3360171318054199
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14100383
flow loss 0.05324586
occ loss 0.08659945
time for this batch 0.38860511779785156
----------------------------------
train loss for this epoch:  0.150796
time for this epoch 44.48101782798767
No_decrease:  19
----------------an epoch starts-------------------
i_epoch:  144
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14681338
flow loss 0.053628393
occ loss 0.09213174
time for this batch 0.351473331451416
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17259312
flow loss 0.059931103
occ loss 0.111193374
time for this batch 0.42259907722473145
----------------------------------
train loss for this epoch:  0.153897
time for this epoch 46.422839641571045
No_decrease:  20
----------------an epoch starts-------------------
i_epoch:  145
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15816486
flow loss 0.05908388
occ loss 0.0985028
time for this batch 0.35427021980285645
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17481837
flow loss 0.05901495
occ loss 0.11498694
time for this batch 0.4056968688964844
----------------------------------
train loss for this epoch:  0.151348
time for this epoch 45.05233645439148
No_decrease:  21
----------------an epoch starts-------------------
i_epoch:  146
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17571689
flow loss 0.06151125
occ loss 0.113386564
time for this batch 0.32569384574890137
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15512396
flow loss 0.0570137
occ loss 0.0973755
time for this batch 0.39374232292175293
----------------------------------
train loss for this epoch:  0.150965
time for this epoch 44.44206976890564
No_decrease:  22
----------------an epoch starts-------------------
i_epoch:  147
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13918348
flow loss 0.051807597
occ loss 0.08627266
time for this batch 0.3408956527709961
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16482882
flow loss 0.05910404
occ loss 0.10498573
time for this batch 0.3984107971191406
----------------------------------
train loss for this epoch:  0.1506
time for this epoch 44.464545011520386
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  148
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15252568
flow loss 0.056423515
occ loss 0.0955497
time for this batch 0.31807661056518555
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1380129
flow loss 0.05316061
occ loss 0.084080644
time for this batch 0.34289121627807617
----------------------------------
train loss for this epoch:  0.151224
time for this epoch 44.49526119232178
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  149
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15712528
flow loss 0.056774132
occ loss 0.09951973
time for this batch 0.37717747688293457
----------------------------------
i_batch:  64.0
the loss for this batch:  0.11470985
flow loss 0.04801199
occ loss 0.065984786
time for this batch 0.3366830348968506
----------------------------------
train loss for this epoch:  0.150286
time for this epoch 42.432223320007324
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  150
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13525255
flow loss 0.05034166
occ loss 0.083330154
time for this batch 0.3041973114013672
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16000932
flow loss 0.055902027
occ loss 0.103501864
time for this batch 0.40467023849487305
----------------------------------
train loss for this epoch:  0.14399
time for this epoch 43.29772448539734
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  151
# batch:  96
i_batch:  0.0
the loss for this batch:  0.119115435
flow loss 0.046671107
occ loss 0.07155427
time for this batch 0.2807340621948242
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17236264
flow loss 0.059009295
occ loss 0.11287566
time for this batch 0.4089009761810303
----------------------------------
train loss for this epoch:  0.142861
time for this epoch 44.51203727722168
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  152
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14487104
flow loss 0.052959364
occ loss 0.091104075
time for this batch 0.318615198135376
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16569407
flow loss 0.057993315
occ loss 0.10679295
time for this batch 0.402576208114624
----------------------------------
train loss for this epoch:  0.142624
time for this epoch 46.476269006729126
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  153
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1419286
flow loss 0.0541011
occ loss 0.087140664
time for this batch 0.41809701919555664
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14647597
flow loss 0.053992882
occ loss 0.09161444
time for this batch 0.4165318012237549
----------------------------------
train loss for this epoch:  0.142283
time for this epoch 45.97362184524536
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  154
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14990947
flow loss 0.056749374
occ loss 0.092292994
time for this batch 0.3493173122406006
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13403353
flow loss 0.050208155
occ loss 0.082989015
time for this batch 0.4082024097442627
----------------------------------
train loss for this epoch:  0.142172
time for this epoch 44.10191226005554
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  155
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15056187
flow loss 0.053331647
occ loss 0.09648338
time for this batch 0.3430440425872803
----------------------------------
i_batch:  64.0
the loss for this batch:  0.119852066
flow loss 0.043884348
occ loss 0.07532722
time for this batch 0.37058568000793457
----------------------------------
train loss for this epoch:  0.142788
time for this epoch 43.27220559120178
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  156
# batch:  96
i_batch:  0.0
the loss for this batch:  0.11878883
flow loss 0.04531774
occ loss 0.07291381
time for this batch 0.3214073181152344
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13807546
flow loss 0.04908952
occ loss 0.08839191
time for this batch 0.38690972328186035
----------------------------------
train loss for this epoch:  0.142351
time for this epoch 46.79589629173279
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  157
# batch:  96
i_batch:  0.0
the loss for this batch:  0.10662092
flow loss 0.04413603
occ loss 0.061737888
time for this batch 0.3975496292114258
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15389413
flow loss 0.051899318
occ loss 0.10134258
time for this batch 0.39633607864379883
----------------------------------
train loss for this epoch:  0.14184
time for this epoch 45.15284013748169
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  158
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14796619
flow loss 0.052553315
occ loss 0.09472123
time for this batch 0.3361663818359375
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14693908
flow loss 0.052790217
occ loss 0.09373187
time for this batch 0.418168306350708
----------------------------------
train loss for this epoch:  0.141844
time for this epoch 45.08456516265869
No_decrease:  8
----------------an epoch starts-------------------
i_epoch:  159
# batch:  96
i_batch:  0.0
the loss for this batch:  0.121077865
flow loss 0.048329398
occ loss 0.07188809
time for this batch 0.3136286735534668
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15009521
flow loss 0.053725712
occ loss 0.09584291
time for this batch 0.39220476150512695
----------------------------------
train loss for this epoch:  0.141996
time for this epoch 43.71593260765076
No_decrease:  9
----------------an epoch starts-------------------
i_epoch:  160
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13424717
flow loss 0.050593134
occ loss 0.082915656
time for this batch 0.35251474380493164
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16208906
flow loss 0.05640122
occ loss 0.10488316
time for this batch 0.39744091033935547
----------------------------------
train loss for this epoch:  0.141748
time for this epoch 43.03426170349121
No_decrease:  10
----------------an epoch starts-------------------
i_epoch:  161
# batch:  96
i_batch:  0.0
the loss for this batch:  0.09105597
flow loss 0.0384658
occ loss 0.051910955
time for this batch 0.313692569732666
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14691666
flow loss 0.052876927
occ loss 0.09323935
time for this batch 0.3919260501861572
----------------------------------
train loss for this epoch:  0.141717
time for this epoch 42.86095666885376
No_decrease:  11
----------------an epoch starts-------------------
i_epoch:  162
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17524119
flow loss 0.0589931
occ loss 0.11523739
time for this batch 0.32852983474731445
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1539074
flow loss 0.052891947
occ loss 0.10043829
time for this batch 0.40260982513427734
----------------------------------
train loss for this epoch:  0.141742
time for this epoch 44.16611456871033
No_decrease:  12
----------------an epoch starts-------------------
i_epoch:  163
# batch:  96
i_batch:  0.0
the loss for this batch:  0.10873576
flow loss 0.047087256
occ loss 0.061012186
time for this batch 0.32584071159362793
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1539353
flow loss 0.05444759
occ loss 0.09883302
time for this batch 0.3945794105529785
----------------------------------
train loss for this epoch:  0.141427
time for this epoch 43.218876361846924
No_decrease:  13
----------------an epoch starts-------------------
i_epoch:  164
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14678064
flow loss 0.057017945
occ loss 0.08901357
time for this batch 0.3404099941253662
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1364549
flow loss 0.05060737
occ loss 0.08543856
time for this batch 0.3892521858215332
----------------------------------
train loss for this epoch:  0.141712
time for this epoch 43.60564136505127
No_decrease:  14
----------------an epoch starts-------------------
i_epoch:  165
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15357776
flow loss 0.054088585
occ loss 0.098700136
time for this batch 0.3397200107574463
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14994733
flow loss 0.05385536
occ loss 0.09544
time for this batch 0.3752717971801758
----------------------------------
train loss for this epoch:  0.141566
time for this epoch 43.63991117477417
No_decrease:  15
----------------an epoch starts-------------------
i_epoch:  166
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14571331
flow loss 0.054852165
occ loss 0.09009856
time for this batch 0.33571481704711914
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1262259
flow loss 0.050042804
occ loss 0.07549671
time for this batch 0.40669846534729004
----------------------------------
train loss for this epoch:  0.141535
time for this epoch 44.236571073532104
No_decrease:  16
----------------an epoch starts-------------------
i_epoch:  167
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14112541
flow loss 0.051998157
occ loss 0.08823089
time for this batch 0.32179975509643555
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16088659
flow loss 0.05332528
occ loss 0.10695385
time for this batch 0.39508557319641113
----------------------------------
train loss for this epoch:  0.141267
time for this epoch 44.42910385131836
No_decrease:  17
----------------an epoch starts-------------------
i_epoch:  168
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13617744
flow loss 0.048776582
occ loss 0.08660188
time for this batch 0.33379602432250977
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1434192
flow loss 0.053732745
occ loss 0.08906519
time for this batch 0.3860177993774414
----------------------------------
train loss for this epoch:  0.141615
time for this epoch 44.222777128219604
No_decrease:  18
----------------an epoch starts-------------------
i_epoch:  169
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15183434
flow loss 0.054702044
occ loss 0.09632134
time for this batch 0.3129267692565918
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13343075
flow loss 0.049736973
occ loss 0.08287299
time for this batch 0.40874600410461426
----------------------------------
train loss for this epoch:  0.141255
time for this epoch 43.705711364746094
No_decrease:  19
----------------an epoch starts-------------------
i_epoch:  170
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1699607
flow loss 0.05432343
occ loss 0.11509193
time for this batch 0.33577537536621094
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16536596
flow loss 0.053339954
occ loss 0.111321665
time for this batch 0.39998626708984375
----------------------------------
train loss for this epoch:  0.141081
time for this epoch 44.52143979072571
No_decrease:  20
----------------an epoch starts-------------------
i_epoch:  171
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13149838
flow loss 0.048763994
occ loss 0.08193328
time for this batch 0.3185093402862549
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16234376
flow loss 0.056190427
occ loss 0.10555648
time for this batch 0.3773157596588135
----------------------------------
train loss for this epoch:  0.141621
time for this epoch 43.72608017921448
No_decrease:  21
----------------an epoch starts-------------------
i_epoch:  172
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15257734
flow loss 0.05323332
occ loss 0.09852559
time for this batch 0.3616597652435303
----------------------------------
i_batch:  64.0
the loss for this batch:  0.12217169
flow loss 0.045823388
occ loss 0.075674236
time for this batch 0.39092326164245605
----------------------------------
train loss for this epoch:  0.141368
time for this epoch 43.08835697174072
No_decrease:  22
----------------an epoch starts-------------------
i_epoch:  173
# batch:  96
i_batch:  0.0
the loss for this batch:  0.11460625
flow loss 0.04512583
occ loss 0.06893029
time for this batch 0.42139458656311035
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14025128
flow loss 0.052543856
occ loss 0.08698616
time for this batch 0.40236711502075195
----------------------------------
train loss for this epoch:  0.141103
time for this epoch 45.26199388504028
No_decrease:  23
----------------an epoch starts-------------------
i_epoch:  174
# batch:  96
i_batch:  0.0
the loss for this batch:  0.10978671
flow loss 0.045810986
occ loss 0.063420765
time for this batch 0.33051156997680664
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13279507
flow loss 0.049355138
occ loss 0.08272226
time for this batch 0.4046497344970703
----------------------------------
train loss for this epoch:  0.141323
time for this epoch 43.94328308105469
No_decrease:  24
----------------an epoch starts-------------------
i_epoch:  175
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15780497
flow loss 0.05602229
occ loss 0.10114216
time for this batch 0.3114643096923828
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13358983
flow loss 0.052735936
occ loss 0.08033185
time for this batch 0.3922545909881592
----------------------------------
train loss for this epoch:  0.141208
time for this epoch 44.878921031951904
No_decrease:  25
----------------an epoch starts-------------------
i_epoch:  176
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15401474
flow loss 0.054444663
occ loss 0.09880937
time for this batch 0.3364086151123047
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14160213
flow loss 0.051741816
occ loss 0.08890241
time for this batch 0.4008657932281494
----------------------------------
train loss for this epoch:  0.140736
time for this epoch 44.7962908744812
No_decrease:  26
----------------an epoch starts-------------------
i_epoch:  177
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14342095
flow loss 0.05399645
occ loss 0.08857188
time for this batch 0.33945751190185547
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14448155
flow loss 0.05036024
occ loss 0.093426645
time for this batch 0.4037461280822754
----------------------------------
train loss for this epoch:  0.140594
time for this epoch 45.22239637374878
No_decrease:  27
----------------an epoch starts-------------------
i_epoch:  178
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14170152
flow loss 0.051143885
occ loss 0.08980846
time for this batch 0.31815242767333984
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13293979
flow loss 0.047689162
occ loss 0.08425216
time for this batch 0.38399338722229004
----------------------------------
train loss for this epoch:  0.141309
time for this epoch 44.28966951370239
No_decrease:  28
----------------an epoch starts-------------------
i_epoch:  179
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13492636
flow loss 0.050229613
occ loss 0.083952494
time for this batch 0.3354008197784424
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15545292
flow loss 0.05598579
occ loss 0.09883411
time for this batch 0.33438634872436523
----------------------------------
train loss for this epoch:  0.141057
time for this epoch 42.8647723197937
No_decrease:  29
----------------an epoch starts-------------------
i_epoch:  180
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16261914
flow loss 0.05577989
occ loss 0.106149584
time for this batch 0.3153853416442871
----------------------------------
i_batch:  64.0
the loss for this batch:  0.12577985
flow loss 0.047966644
occ loss 0.077325545
time for this batch 0.40001702308654785
----------------------------------
train loss for this epoch:  0.140744
time for this epoch 43.76629972457886
Early stop at the 181-th epoch

7: apply the model to vali and test¶

In [13]:
def apply_to_vali_test(model, vt, f_o_mean_std):
    """Evaluate a trained model on one data split (validation or test).

    Pulls the flow/occupancy tensors and their observation masks out of the
    split dict, moves the masks onto the global ``device``, and delegates the
    actual metric computation to the module-level ``vali_test`` helper using
    the evaluation batch size from ``hyper["b_s_vt"]``.

    Args:
        model: trained NMFD_GNN model to evaluate.
        vt: dict for one split with keys "flow", "flow_mask",
            "occupancy", "occupancy_mask".
        f_o_mean_std: flow/occupancy normalization statistics, forwarded
            unchanged to ``vali_test`` (used to de-normalize predictions —
            presumably mean/std pairs; confirm against ``vali_test``).

    Returns:
        Tuple (flow_mae, flow_rmse, occ_mae, occ_rmse), each also printed.
    """
    flow = vt["flow"]
    flow_mask = vt["flow_mask"].to(device)
    occ = vt["occupancy"]
    occ_mask = vt["occupancy_mask"].to(device)
    metrics = vali_test(model, flow, flow_mask, occ, occ_mask,
                        f_o_mean_std, hyper["b_s_vt"])
    # Echo each metric so the notebook cell output shows the numbers inline.
    for label, value in zip(("flow_mae", "flow_rmse", "occ_mae", "occ_rmse"),
                            metrics):
        print (label, value)
    flow_mae, flow_rmse, occ_mae, occ_rmse = metrics
    return flow_mae, flow_rmse, occ_mae, occ_rmse

Validate¶

In [14]:
# Evaluate the trained model on the validation split and keep the four
# metrics (flow MAE/RMSE, occupancy MAE/RMSE) for later comparison.
vali_f_mae, vali_f_rmse, vali_o_mae, vali_o_rmse = apply_to_vali_test(
    trained_model, vali, f_o_mean_std)
flow_mae 40.658161587428744
flow_rmse 66.91882702557015
occ_mae 0.03481488251547097
occ_rmse 0.06842147850910055

Test¶

In [15]:
# Evaluate the trained model on the held-out test split; same four
# metrics as the validation cell above.
test_f_mae, test_f_rmse, test_o_mae, test_o_rmse = apply_to_vali_test(
    trained_model, test, f_o_mean_std)
flow_mae 39.388068859146735
flow_rmse 64.68828417712056
occ_mae 0.03058539743703016
occ_rmse 0.06111997924327654
In [ ]:
 
In [ ]:
 
In [ ]: